import os
from glob import glob
import csv
import cv2
import numpy as np
import matplotlib.pyplot as plt
import random
# Dataset layout: one directory per class; every file inside a class
# directory is treated as an image of that class.
TRUE_IMG_PATH = './True/'
FALSE_IMG_PATH = './False/'
pixel_size = 224  # images are resized to pixel_size x pixel_size grayscale

true_imgs = glob(TRUE_IMG_PATH + '*')
false_imgs = glob(FALSE_IMG_PATH + '*')
all_imgs = true_imgs + false_imgs
print(len(true_imgs))
print(len(false_imgs))

len_imgs = len(true_imgs) + len(false_imgs)
# BUG FIX: np.array((len_imgs, pixel_size, pixel_size, 1)) created a
# 4-element 1-D array holding the shape values themselves; np.zeros
# allocates the intended (N, H, W, 1) input buffer.
x_input = np.zeros((len_imgs, pixel_size, pixel_size, 1))
x_label = []
true_label = []
false_label = []
def read_img_label(img_list, label):
    """Load every image in *img_list* as grayscale, resized to a
    pixel_size square, and pair each with the given one-hot label.

    Returns (imgs_array, label_list): imgs_array has shape
    (N, pixel_size, pixel_size, 1); label_list has shape (N, len(label)).
    """
    count = len(img_list)
    imgs_array = np.zeros((count, pixel_size, pixel_size, 1))
    for idx, path in enumerate(img_list):
        gray = cv2.imread(path, 0)  # 0 -> read as single-channel grayscale
        imgs_array[idx, :, :, 0] = cv2.resize(gray, (pixel_size, pixel_size))
    # One copy of the label per image, as a (N, len(label)) array.
    label_list = np.array([label] * count)
    return imgs_array, label_list
# Load each class with its one-hot target: "true" -> [0, 1], "false" -> [1, 0].
true_inputs,true_labels = read_img_label(true_imgs,[0,1])
false_inputs,false_labels = read_img_label(false_imgs,[1,0])
# notebook output: 113 46
# Sanity check: print the loaded array shape and eyeball one sample.
print (true_inputs.shape)
plt.imshow(true_inputs[10,:,:,0],cmap=plt.cm.gray)  # assumes at least 11 "true" images — TODO confirm
plt.show()
def shuffle_data(a, b, r1):
    """Shuffle two equal-length indexable arrays with one shared permutation.

    a, b : arrays (e.g. numpy) of identical length; rows stay paired
        after shuffling.
    r1 : seed value that makes the permutation deterministic.

    Returns the shuffled (a, b) pair.

    BUG FIX: the original passed ``lambda: r1`` as random.shuffle's second
    argument; that ``random`` parameter was deprecated and removed in
    Python 3.11 (TypeError there).  A random.Random instance seeded with
    r1 preserves the deterministic-shuffle intent portably.
    """
    assert len(a) == len(b)
    perm = list(range(len(a)))
    random.Random(r1).shuffle(perm)
    # dtype=int keeps fancy indexing valid even for the empty case.
    p = np.array(perm, dtype=int)
    return a[p], b[p]
# Merge the two classes, shuffle both arrays with a fixed seed value,
# then take an 80/20 train/validation split.
data=np.vstack((true_inputs,false_inputs))
labels=np.vstack((true_labels,false_labels))
data,labels=shuffle_data(data,labels,0.1)
split_point=int(round(0.8*len(data)))
(x_train,x_val)=(data[:split_point],data[split_point:])
(y_train,y_val)= (labels[:split_point],labels[split_point:])
# (x_train, y_train), (x_val, y_val) = mnist.load_data()
# notebook output: (113, 224, 224, 1)
from keras.applications import ResNet50
from vis.utils import utils
from keras import activations
from keras.layers import Dense, GlobalAveragePooling2D,BatchNormalization,Activation, Flatten,AveragePooling2D, Input
from keras.models import Model
# Hide warnings on Jupyter Notebook
import warnings
warnings.filterwarnings('ignore')
# Build the ResNet50 network with ImageNet weights
# Build a ResNet50 backbone trained from scratch (weights=None) on
# single-channel 224x224 inputs, then attach a small 2-way softmax head.
input_shape = (224, 224, 1)
# NOTE: the `classes` argument is ignored when include_top=False, so it
# is omitted here; the class count is set by the Dense head below.
model = ResNet50(weights=None, include_top=False, input_shape=input_shape)
# model.summary()
backbone_input = model.input  # renamed from `input`, which shadowed the builtin
x = model.output
x = Activation('relu')(x)
x = BatchNormalization()(x)
x = AveragePooling2D()(x)
print (x.shape)
x = Flatten()(x)
dense = Dense(2, activation='softmax')(x)
model = Model(inputs=backbone_input, outputs=dense)
# notebook output: Using TensorFlow backend.
# notebook output: (?, 3, 3, 2048)
import numpy as np
import keras
from keras.datasets import mnist
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Flatten, Activation, Input
from keras.layers import Conv2D, MaxPooling2D
from keras import backend as K
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D
import os
from keras.layers.normalization import BatchNormalization
from keras.callbacks import ReduceLROnPlateau, CSVLogger, EarlyStopping
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils
from keras.callbacks import ReduceLROnPlateau, CSVLogger, EarlyStopping
import numpy as np
import resnet
os.environ["CUDA_VISIBLE_DEVICES"] = '0'  # pin training to the first GPU
# Reduce LR by sqrt(0.1) after 5 epochs without improvement; floor at 5e-7.
lr_reducer = ReduceLROnPlateau(factor=np.sqrt(0.1), cooldown=0, patience=5, min_lr=0.5e-6)
# Stop when val loss improves by < 0.001 for 10 consecutive epochs.
early_stopper = EarlyStopping(min_delta=0.001, patience=10)
# csv_logger = CSVLogger('resnet18_cifar10.csv')
batch_size = 32
nb_classes = 2
nb_epoch = 250
data_augmentation = True
# input image dimensions
img_rows, img_cols = 224, 224
# images here are single-channel grayscale (unlike CIFAR10's RGB)
img_channels = 1
# Compile for one-hot 2-class targets; labels are [0,1]/[1,0] vectors,
# so categorical_crossentropy is the matching loss.
model.compile(loss='categorical_crossentropy',
              optimizer='adam',
              metrics=['accuracy'])

if not data_augmentation:
    print('Not using data augmentation.')
    # CONSISTENCY FIX: `nb_epoch` is the Keras 1 kwarg; the augmented
    # branch already uses the Keras 2 name `epochs`, so use it here too.
    model.fit(x_train, y_train,
              batch_size=batch_size,
              epochs=nb_epoch,
              validation_data=(x_val, y_val),
              shuffle=True,
              callbacks=[lr_reducer, early_stopper])
else:
    print('Using real-time data augmentation.')
    # This will do preprocessing and realtime data augmentation:
    datagen = ImageDataGenerator(
        featurewise_center=False,  # set input mean to 0 over the dataset
        samplewise_center=False,  # set each sample mean to 0
        featurewise_std_normalization=False,  # divide inputs by std of the dataset
        samplewise_std_normalization=False,  # divide each input by its std
        zca_whitening=False,  # apply ZCA whitening
        rotation_range=0,  # randomly rotate images in the range (degrees, 0 to 180)
        width_shift_range=0.1,  # randomly shift images horizontally (fraction of total width)
        height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
        horizontal_flip=True,  # randomly flip images
        vertical_flip=False)  # randomly flip images
    # Compute quantities required for featurewise normalization
    # (std, mean, and principal components if ZCA whitening is applied).
    datagen.fit(x_train)
    # Fit the model on the batches generated by datagen.flow().
    # NOTE(review): early_stopper is only used in the non-augmented branch
    # above — confirm whether its omission here is intentional.
    model.fit_generator(datagen.flow(x_train, y_train, batch_size=batch_size),
                        steps_per_epoch=x_train.shape[0] // batch_size,
                        validation_data=(x_val, y_val),
                        epochs=nb_epoch, verbose=1,
                        max_queue_size=100,  # Keras 2 name for max_q_size
                        callbacks=[lr_reducer])
Using real-time data augmentation. Epoch 1/250 3/3 [==============================] - 12s 4s/step - loss: 1.6029 - acc: 0.6625 - val_loss: 13.2023 - val_acc: 0.1250 Epoch 2/250 3/3 [==============================] - 1s 243ms/step - loss: 1.2281 - acc: 0.8421 - val_loss: 13.6004 - val_acc: 0.1562 Epoch 3/250 3/3 [==============================] - 1s 187ms/step - loss: 1.1374 - acc: 0.8333 - val_loss: 13.5685 - val_acc: 0.1250 Epoch 4/250 3/3 [==============================] - 1s 188ms/step - loss: 1.0393 - acc: 0.8739 - val_loss: 13.3425 - val_acc: 0.1562 Epoch 5/250 3/3 [==============================] - 1s 186ms/step - loss: 0.6721 - acc: 0.8739 - val_loss: 13.9465 - val_acc: 0.1250 Epoch 6/250 3/3 [==============================] - 1s 188ms/step - loss: 0.4732 - acc: 0.8951 - val_loss: 13.9365 - val_acc: 0.1250 Epoch 7/250 3/3 [==============================] - 1s 187ms/step - loss: 0.6108 - acc: 0.9271 - val_loss: 13.3865 - val_acc: 0.1250 Epoch 8/250 3/3 [==============================] - 1s 188ms/step - loss: 0.5390 - acc: 0.9264 - val_loss: 13.1630 - val_acc: 0.1250 Epoch 9/250 3/3 [==============================] - 1s 185ms/step - loss: 0.3506 - acc: 0.9582 - val_loss: 11.5725 - val_acc: 0.1250 Epoch 10/250 3/3 [==============================] - 1s 187ms/step - loss: 0.1625 - acc: 0.9261 - val_loss: 10.2172 - val_acc: 0.1875 Epoch 11/250 3/3 [==============================] - 1s 190ms/step - loss: 0.4151 - acc: 0.9062 - val_loss: 7.4806 - val_acc: 0.2812 Epoch 12/250 3/3 [==============================] - 1s 187ms/step - loss: 0.3085 - acc: 0.9259 - val_loss: 5.8677 - val_acc: 0.4062 Epoch 13/250 3/3 [==============================] - 1s 187ms/step - loss: 0.3048 - acc: 0.9371 - val_loss: 5.4077 - val_acc: 0.5000 Epoch 14/250 3/3 [==============================] - 1s 188ms/step - loss: 0.2418 - acc: 0.9580 - val_loss: 5.4006 - val_acc: 0.5312 Epoch 15/250 3/3 [==============================] - 1s 190ms/step - loss: 0.1118 - acc: 0.9375 - val_loss: 4.2844 - 
val_acc: 0.5312 Epoch 16/250 3/3 [==============================] - 1s 187ms/step - loss: 0.4546 - acc: 0.8846 - val_loss: 3.4438 - val_acc: 0.5000 Epoch 17/250 3/3 [==============================] - 1s 188ms/step - loss: 0.3625 - acc: 0.8958 - val_loss: 3.0097 - val_acc: 0.5938 Epoch 18/250 3/3 [==============================] - 1s 185ms/step - loss: 0.3012 - acc: 0.9254 - val_loss: 3.3761 - val_acc: 0.5000 Epoch 19/250 3/3 [==============================] - 1s 189ms/step - loss: 0.2005 - acc: 0.9167 - val_loss: 3.2513 - val_acc: 0.5312 Epoch 20/250 3/3 [==============================] - 1s 188ms/step - loss: 0.2299 - acc: 0.9577 - val_loss: 2.9511 - val_acc: 0.6875 Epoch 21/250 3/3 [==============================] - 1s 185ms/step - loss: 0.2138 - acc: 0.9791 - val_loss: 2.9727 - val_acc: 0.6875 Epoch 22/250 3/3 [==============================] - 1s 188ms/step - loss: 0.1045 - acc: 0.9470 - val_loss: 2.8080 - val_acc: 0.6875 Epoch 23/250 3/3 [==============================] - 1s 189ms/step - loss: 0.2454 - acc: 0.9479 - val_loss: 2.6158 - val_acc: 0.6875 Epoch 24/250 3/3 [==============================] - 1s 187ms/step - loss: 0.2521 - acc: 0.9684 - val_loss: 2.4332 - val_acc: 0.6875 Epoch 25/250 3/3 [==============================] - 1s 188ms/step - loss: 0.2944 - acc: 0.9366 - val_loss: 2.2106 - val_acc: 0.6875 Epoch 26/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0843 - acc: 0.9682 - val_loss: 2.3073 - val_acc: 0.6875 Epoch 27/250 3/3 [==============================] - 1s 189ms/step - loss: 0.2389 - acc: 0.9475 - val_loss: 2.3587 - val_acc: 0.7187 Epoch 28/250 3/3 [==============================] - 1s 189ms/step - loss: 0.2898 - acc: 0.9479 - val_loss: 2.2320 - val_acc: 0.7500 Epoch 29/250 3/3 [==============================] - 1s 187ms/step - loss: 0.3702 - acc: 0.9684 - val_loss: 2.2138 - val_acc: 0.7500 Epoch 30/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0673 - acc: 0.9684 - val_loss: 2.0722 - val_acc: 0.7500 
Epoch 31/250 3/3 [==============================] - 1s 190ms/step - loss: 0.2514 - acc: 0.9688 - val_loss: 1.6406 - val_acc: 0.7500 Epoch 32/250 3/3 [==============================] - 1s 186ms/step - loss: 0.3011 - acc: 0.9473 - val_loss: 1.4230 - val_acc: 0.7812 Epoch 33/250 3/3 [==============================] - 1s 185ms/step - loss: 0.0584 - acc: 0.9789 - val_loss: 1.5884 - val_acc: 0.8125 Epoch 34/250 3/3 [==============================] - 1s 187ms/step - loss: 0.1761 - acc: 0.9375 - val_loss: 1.8783 - val_acc: 0.7500 Epoch 35/250 3/3 [==============================] - 1s 187ms/step - loss: 0.1082 - acc: 0.9266 - val_loss: 2.0066 - val_acc: 0.6250 Epoch 36/250 3/3 [==============================] - 1s 187ms/step - loss: 0.1183 - acc: 0.9475 - val_loss: 2.0361 - val_acc: 0.7188 Epoch 37/250 3/3 [==============================] - 1s 188ms/step - loss: 0.2067 - acc: 0.9366 - val_loss: 1.6249 - val_acc: 0.7500 Epoch 38/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0807 - acc: 0.9583 - val_loss: 1.5640 - val_acc: 0.7500 Epoch 39/250 3/3 [==============================] - 1s 187ms/step - loss: 0.1193 - acc: 0.9686 - val_loss: 1.4577 - val_acc: 0.7812 Epoch 40/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0647 - acc: 0.9792 - val_loss: 1.4237 - val_acc: 0.7812 Epoch 41/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0532 - acc: 0.9896 - val_loss: 1.4284 - val_acc: 0.7812 Epoch 42/250 3/3 [==============================] - 1s 185ms/step - loss: 0.0382 - acc: 0.9896 - val_loss: 1.4127 - val_acc: 0.7812 Epoch 43/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0115 - acc: 1.0000 - val_loss: 1.3802 - val_acc: 0.8125 Epoch 44/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0120 - acc: 1.0000 - val_loss: 1.3483 - val_acc: 0.8125 Epoch 45/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0556 - acc: 0.9896 - val_loss: 1.1895 - val_acc: 0.8125 Epoch 46/250 3/3 
[==============================] - 1s 187ms/step - loss: 0.0226 - acc: 0.9896 - val_loss: 0.9899 - val_acc: 0.8438 Epoch 47/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0185 - acc: 1.0000 - val_loss: 0.8778 - val_acc: 0.8438 Epoch 48/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0356 - acc: 0.9893 - val_loss: 0.8108 - val_acc: 0.8438 Epoch 49/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0880 - acc: 0.9791 - val_loss: 0.8012 - val_acc: 0.8438 Epoch 50/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0718 - acc: 0.9792 - val_loss: 0.8413 - val_acc: 0.8125 Epoch 51/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0150 - acc: 1.0000 - val_loss: 0.9073 - val_acc: 0.8125 Epoch 52/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0177 - acc: 0.9896 - val_loss: 0.9688 - val_acc: 0.8125 Epoch 53/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0405 - acc: 0.9893 - val_loss: 0.9769 - val_acc: 0.8125 Epoch 54/250 3/3 [==============================] - 1s 185ms/step - loss: 0.0411 - acc: 0.9789 - val_loss: 0.9595 - val_acc: 0.8125 Epoch 55/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0906 - acc: 0.9792 - val_loss: 0.9523 - val_acc: 0.8125 Epoch 56/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0219 - acc: 1.0000 - val_loss: 0.9446 - val_acc: 0.8125 Epoch 57/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0289 - acc: 0.9791 - val_loss: 0.9447 - val_acc: 0.8125 Epoch 58/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0124 - acc: 1.0000 - val_loss: 0.9429 - val_acc: 0.8125 Epoch 59/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0082 - acc: 1.0000 - val_loss: 0.9484 - val_acc: 0.8125 Epoch 60/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0063 - acc: 1.0000 - val_loss: 0.9513 - val_acc: 0.8125 Epoch 61/250 3/3 
[==============================] - 1s 188ms/step - loss: 0.0412 - acc: 0.9789 - val_loss: 0.9558 - val_acc: 0.8125 Epoch 62/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0149 - acc: 0.9896 - val_loss: 0.9565 - val_acc: 0.8125 Epoch 63/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0167 - acc: 0.9896 - val_loss: 0.9532 - val_acc: 0.8125 Epoch 64/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0096 - acc: 1.0000 - val_loss: 0.9451 - val_acc: 0.8125 Epoch 65/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0140 - acc: 1.0000 - val_loss: 0.9441 - val_acc: 0.8125 Epoch 66/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0533 - acc: 0.9896 - val_loss: 0.9434 - val_acc: 0.8125 Epoch 67/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0978 - acc: 0.9786 - val_loss: 0.9429 - val_acc: 0.8125 Epoch 68/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0169 - acc: 1.0000 - val_loss: 0.9421 - val_acc: 0.8125 Epoch 69/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0357 - acc: 0.9789 - val_loss: 0.9384 - val_acc: 0.8125 Epoch 70/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0193 - acc: 1.0000 - val_loss: 0.9391 - val_acc: 0.8125 Epoch 71/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0163 - acc: 0.9896 - val_loss: 0.9397 - val_acc: 0.8125 Epoch 72/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0193 - acc: 1.0000 - val_loss: 0.9383 - val_acc: 0.8125 Epoch 73/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0280 - acc: 0.9789 - val_loss: 0.9377 - val_acc: 0.8125 Epoch 74/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0169 - acc: 0.9896 - val_loss: 0.9412 - val_acc: 0.8125 Epoch 75/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0389 - acc: 0.9896 - val_loss: 0.9444 - val_acc: 0.8125 Epoch 76/250 3/3 
[==============================] - 1s 189ms/step - loss: 0.0197 - acc: 0.9896 - val_loss: 0.9446 - val_acc: 0.8125 Epoch 77/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0547 - acc: 0.9684 - val_loss: 0.9445 - val_acc: 0.8125 Epoch 78/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0037 - acc: 1.0000 - val_loss: 0.9453 - val_acc: 0.8125 Epoch 79/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0299 - acc: 0.9896 - val_loss: 0.9497 - val_acc: 0.8125 Epoch 80/250 3/3 [==============================] - 1s 191ms/step - loss: 0.1711 - acc: 0.9368 - val_loss: 0.9532 - val_acc: 0.8125 Epoch 81/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0114 - acc: 1.0000 - val_loss: 0.9509 - val_acc: 0.8125 Epoch 82/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0613 - acc: 0.9893 - val_loss: 0.9542 - val_acc: 0.8125 Epoch 83/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0451 - acc: 0.9896 - val_loss: 0.9587 - val_acc: 0.8125 Epoch 84/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0382 - acc: 0.9791 - val_loss: 0.9611 - val_acc: 0.8125 Epoch 85/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0054 - acc: 1.0000 - val_loss: 0.9624 - val_acc: 0.8125 Epoch 86/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0144 - acc: 1.0000 - val_loss: 0.9638 - val_acc: 0.8125 Epoch 87/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0194 - acc: 1.0000 - val_loss: 0.9641 - val_acc: 0.8125 Epoch 88/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0142 - acc: 1.0000 - val_loss: 0.9686 - val_acc: 0.8125 Epoch 89/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0123 - acc: 1.0000 - val_loss: 0.9685 - val_acc: 0.8125 Epoch 90/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0428 - acc: 0.9896 - val_loss: 0.9734 - val_acc: 0.8125 Epoch 91/250 3/3 
[==============================] - 1s 189ms/step - loss: 0.0173 - acc: 1.0000 - val_loss: 0.9758 - val_acc: 0.8125 Epoch 92/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0136 - acc: 1.0000 - val_loss: 0.9760 - val_acc: 0.8125 Epoch 93/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0176 - acc: 1.0000 - val_loss: 0.9761 - val_acc: 0.8125 Epoch 94/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0143 - acc: 1.0000 - val_loss: 0.9780 - val_acc: 0.8125 Epoch 95/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0154 - acc: 1.0000 - val_loss: 0.9820 - val_acc: 0.8125 Epoch 96/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0094 - acc: 1.0000 - val_loss: 0.9839 - val_acc: 0.8125 Epoch 97/250 3/3 [==============================] - 1s 188ms/step - loss: 0.1611 - acc: 0.9679 - val_loss: 0.9879 - val_acc: 0.8125 Epoch 98/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0090 - acc: 1.0000 - val_loss: 0.9877 - val_acc: 0.8125 Epoch 99/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0177 - acc: 1.0000 - val_loss: 0.9866 - val_acc: 0.8125 Epoch 100/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0142 - acc: 0.9896 - val_loss: 0.9898 - val_acc: 0.8125 Epoch 101/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0210 - acc: 1.0000 - val_loss: 0.9932 - val_acc: 0.8125 Epoch 102/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0258 - acc: 0.9792 - val_loss: 0.9950 - val_acc: 0.8125 Epoch 103/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0590 - acc: 0.9684 - val_loss: 0.9943 - val_acc: 0.8125 Epoch 104/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0314 - acc: 0.9896 - val_loss: 0.9972 - val_acc: 0.8125 Epoch 105/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0275 - acc: 0.9789 - val_loss: 0.9993 - val_acc: 0.7812 Epoch 106/250 3/3 
[==============================] - 1s 189ms/step - loss: 0.0115 - acc: 1.0000 - val_loss: 1.0025 - val_acc: 0.7812 Epoch 107/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0127 - acc: 1.0000 - val_loss: 1.0032 - val_acc: 0.7812 Epoch 108/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0043 - acc: 1.0000 - val_loss: 1.0016 - val_acc: 0.7812 Epoch 109/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0181 - acc: 1.0000 - val_loss: 1.0044 - val_acc: 0.7812 Epoch 110/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0456 - acc: 0.9682 - val_loss: 1.0042 - val_acc: 0.7812 Epoch 111/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0145 - acc: 1.0000 - val_loss: 1.0038 - val_acc: 0.7812 Epoch 112/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0226 - acc: 1.0000 - val_loss: 1.0054 - val_acc: 0.7812 Epoch 113/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0134 - acc: 1.0000 - val_loss: 1.0076 - val_acc: 0.7812 Epoch 114/250 3/3 [==============================] - 1s 185ms/step - loss: 0.0116 - acc: 1.0000 - val_loss: 1.0051 - val_acc: 0.7812 Epoch 115/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0250 - acc: 0.9896 - val_loss: 1.0070 - val_acc: 0.7812 Epoch 116/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0154 - acc: 0.9896 - val_loss: 1.0078 - val_acc: 0.7812 Epoch 117/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0189 - acc: 1.0000 - val_loss: 1.0099 - val_acc: 0.7812 Epoch 118/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0275 - acc: 1.0000 - val_loss: 1.0117 - val_acc: 0.7812 Epoch 119/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0117 - acc: 0.9896 - val_loss: 1.0099 - val_acc: 0.7812 Epoch 120/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0160 - acc: 1.0000 - val_loss: 1.0117 - val_acc: 0.7812 Epoch 121/250 3/3 
[==============================] - 1s 188ms/step - loss: 0.0169 - acc: 1.0000 - val_loss: 1.0136 - val_acc: 0.7812 Epoch 122/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0623 - acc: 0.9788 - val_loss: 1.0168 - val_acc: 0.7812 Epoch 123/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0538 - acc: 0.9896 - val_loss: 1.0135 - val_acc: 0.7812 Epoch 124/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0086 - acc: 1.0000 - val_loss: 1.0153 - val_acc: 0.7812 Epoch 125/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0116 - acc: 1.0000 - val_loss: 1.0163 - val_acc: 0.7812 Epoch 126/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0458 - acc: 0.9791 - val_loss: 1.0151 - val_acc: 0.7812 Epoch 127/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0483 - acc: 0.9893 - val_loss: 1.0146 - val_acc: 0.7812 Epoch 128/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0137 - acc: 1.0000 - val_loss: 1.0187 - val_acc: 0.7812 Epoch 129/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0146 - acc: 0.9896 - val_loss: 1.0203 - val_acc: 0.7812 Epoch 130/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0322 - acc: 0.9893 - val_loss: 1.0219 - val_acc: 0.7812 Epoch 131/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0049 - acc: 1.0000 - val_loss: 1.0268 - val_acc: 0.7812 Epoch 132/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0081 - acc: 1.0000 - val_loss: 1.0260 - val_acc: 0.7812 Epoch 133/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0254 - acc: 1.0000 - val_loss: 1.0281 - val_acc: 0.7812 Epoch 134/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0049 - acc: 1.0000 - val_loss: 1.0299 - val_acc: 0.7812 Epoch 135/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0225 - acc: 1.0000 - val_loss: 1.0304 - val_acc: 0.7812 Epoch 136/250 3/3 
[==============================] - 1s 190ms/step - loss: 0.0093 - acc: 1.0000 - val_loss: 1.0316 - val_acc: 0.7812 Epoch 137/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0645 - acc: 0.9580 - val_loss: 1.0342 - val_acc: 0.7812 Epoch 138/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0224 - acc: 1.0000 - val_loss: 1.0336 - val_acc: 0.7812 Epoch 139/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0415 - acc: 0.9896 - val_loss: 1.0318 - val_acc: 0.7812 Epoch 140/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0418 - acc: 0.9896 - val_loss: 1.0346 - val_acc: 0.7812 Epoch 141/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0597 - acc: 0.9896 - val_loss: 1.0332 - val_acc: 0.7812 Epoch 142/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0198 - acc: 0.9896 - val_loss: 1.0301 - val_acc: 0.7812 Epoch 143/250 3/3 [==============================] - 1s 189ms/step - loss: 0.1144 - acc: 0.9688 - val_loss: 1.0357 - val_acc: 0.7812 Epoch 144/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0315 - acc: 0.9896 - val_loss: 1.0361 - val_acc: 0.7812 Epoch 145/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0241 - acc: 1.0000 - val_loss: 1.0385 - val_acc: 0.7812 Epoch 146/250 3/3 [==============================] - 1s 185ms/step - loss: 0.0324 - acc: 0.9788 - val_loss: 1.0359 - val_acc: 0.7812 Epoch 147/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0305 - acc: 0.9896 - val_loss: 1.0304 - val_acc: 0.7812 Epoch 148/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0101 - acc: 1.0000 - val_loss: 1.0299 - val_acc: 0.7812 Epoch 149/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0488 - acc: 0.9792 - val_loss: 1.0296 - val_acc: 0.7812 Epoch 150/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0354 - acc: 1.0000 - val_loss: 1.0318 - val_acc: 0.7812 Epoch 151/250 3/3 
[==============================] - 1s 188ms/step - loss: 0.0181 - acc: 1.0000 - val_loss: 1.0318 - val_acc: 0.7812 Epoch 152/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0102 - acc: 1.0000 - val_loss: 1.0303 - val_acc: 0.7812 Epoch 153/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0440 - acc: 0.9896 - val_loss: 1.0318 - val_acc: 0.7812 Epoch 154/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0424 - acc: 0.9896 - val_loss: 1.0293 - val_acc: 0.7812 Epoch 155/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0296 - acc: 0.9896 - val_loss: 1.0286 - val_acc: 0.7812 Epoch 156/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0476 - acc: 0.9791 - val_loss: 1.0278 - val_acc: 0.7812 Epoch 157/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0233 - acc: 0.9896 - val_loss: 1.0270 - val_acc: 0.7812 Epoch 158/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0541 - acc: 0.9789 - val_loss: 1.0247 - val_acc: 0.7812 Epoch 159/250 3/3 [==============================] - 1s 190ms/step - loss: 0.1211 - acc: 0.9580 - val_loss: 1.0273 - val_acc: 0.7812 Epoch 160/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0497 - acc: 0.9896 - val_loss: 1.0245 - val_acc: 0.7812 Epoch 161/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0073 - acc: 1.0000 - val_loss: 1.0246 - val_acc: 0.7812 Epoch 162/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0105 - acc: 1.0000 - val_loss: 1.0245 - val_acc: 0.7812 Epoch 163/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0120 - acc: 1.0000 - val_loss: 1.0241 - val_acc: 0.7812 Epoch 164/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0378 - acc: 0.9896 - val_loss: 1.0279 - val_acc: 0.7812 Epoch 165/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0148 - acc: 0.9896 - val_loss: 1.0298 - val_acc: 0.7812 Epoch 166/250 3/3 
[==============================] - 1s 188ms/step - loss: 0.0199 - acc: 0.9896 - val_loss: 1.0293 - val_acc: 0.7812 Epoch 167/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0175 - acc: 1.0000 - val_loss: 1.0286 - val_acc: 0.7812 Epoch 168/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0145 - acc: 1.0000 - val_loss: 1.0303 - val_acc: 0.7812 Epoch 169/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0057 - acc: 1.0000 - val_loss: 1.0345 - val_acc: 0.7812 Epoch 170/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0114 - acc: 1.0000 - val_loss: 1.0320 - val_acc: 0.7812 Epoch 171/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0304 - acc: 0.9789 - val_loss: 1.0337 - val_acc: 0.7812 Epoch 172/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0086 - acc: 1.0000 - val_loss: 1.0334 - val_acc: 0.7812 Epoch 173/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0200 - acc: 0.9893 - val_loss: 1.0346 - val_acc: 0.7812 Epoch 174/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0170 - acc: 1.0000 - val_loss: 1.0336 - val_acc: 0.7812 Epoch 175/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0104 - acc: 1.0000 - val_loss: 1.0354 - val_acc: 0.7812 Epoch 176/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0072 - acc: 1.0000 - val_loss: 1.0341 - val_acc: 0.7812 Epoch 177/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0282 - acc: 0.9896 - val_loss: 1.0373 - val_acc: 0.7812 Epoch 178/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0165 - acc: 1.0000 - val_loss: 1.0382 - val_acc: 0.7812 Epoch 179/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0114 - acc: 1.0000 - val_loss: 1.0394 - val_acc: 0.7812 Epoch 180/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0194 - acc: 0.9893 - val_loss: 1.0391 - val_acc: 0.7812 Epoch 181/250 3/3 
[==============================] - 1s 186ms/step - loss: 0.0185 - acc: 1.0000 - val_loss: 1.0383 - val_acc: 0.7812 Epoch 182/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0349 - acc: 0.9896 - val_loss: 1.0413 - val_acc: 0.7812 Epoch 183/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0265 - acc: 0.9893 - val_loss: 1.0412 - val_acc: 0.7812 Epoch 184/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0132 - acc: 1.0000 - val_loss: 1.0429 - val_acc: 0.7812 Epoch 185/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0041 - acc: 1.0000 - val_loss: 1.0429 - val_acc: 0.7812 Epoch 186/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0445 - acc: 0.9789 - val_loss: 1.0443 - val_acc: 0.7812 Epoch 187/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0082 - acc: 1.0000 - val_loss: 1.0445 - val_acc: 0.7812 Epoch 188/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0178 - acc: 1.0000 - val_loss: 1.0444 - val_acc: 0.7812 Epoch 189/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0431 - acc: 0.9684 - val_loss: 1.0487 - val_acc: 0.7812 Epoch 190/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0227 - acc: 1.0000 - val_loss: 1.0468 - val_acc: 0.7812 Epoch 191/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0244 - acc: 0.9896 - val_loss: 1.0499 - val_acc: 0.7812 Epoch 192/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0152 - acc: 0.9896 - val_loss: 1.0480 - val_acc: 0.7812 Epoch 193/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0080 - acc: 1.0000 - val_loss: 1.0478 - val_acc: 0.7812 Epoch 194/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0116 - acc: 1.0000 - val_loss: 1.0514 - val_acc: 0.7812 Epoch 195/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0137 - acc: 1.0000 - val_loss: 1.0517 - val_acc: 0.7812 Epoch 196/250 3/3 
[==============================] - 1s 188ms/step - loss: 0.0099 - acc: 1.0000 - val_loss: 1.0509 - val_acc: 0.7812 Epoch 197/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0415 - acc: 0.9792 - val_loss: 1.0543 - val_acc: 0.7812 Epoch 198/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0125 - acc: 1.0000 - val_loss: 1.0511 - val_acc: 0.7812 Epoch 199/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0438 - acc: 0.9789 - val_loss: 1.0552 - val_acc: 0.7812 Epoch 200/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0227 - acc: 0.9896 - val_loss: 1.0541 - val_acc: 0.7812 Epoch 201/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0427 - acc: 0.9896 - val_loss: 1.0537 - val_acc: 0.7812 Epoch 202/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0202 - acc: 0.9893 - val_loss: 1.0515 - val_acc: 0.7812 Epoch 203/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0357 - acc: 0.9896 - val_loss: 1.0505 - val_acc: 0.7812 Epoch 204/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0254 - acc: 0.9896 - val_loss: 1.0529 - val_acc: 0.7812 Epoch 205/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0245 - acc: 0.9896 - val_loss: 1.0532 - val_acc: 0.7812 Epoch 206/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0175 - acc: 1.0000 - val_loss: 1.0519 - val_acc: 0.7812 Epoch 207/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0303 - acc: 0.9896 - val_loss: 1.0564 - val_acc: 0.7812 Epoch 208/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0270 - acc: 0.9896 - val_loss: 1.0549 - val_acc: 0.7812 Epoch 209/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0311 - acc: 0.9896 - val_loss: 1.0568 - val_acc: 0.7812 Epoch 210/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0150 - acc: 0.9893 - val_loss: 1.0595 - val_acc: 0.7812 Epoch 211/250 3/3 
[==============================] - 1s 189ms/step - loss: 0.0212 - acc: 1.0000 - val_loss: 1.0550 - val_acc: 0.7812 Epoch 212/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0224 - acc: 0.9893 - val_loss: 1.0577 - val_acc: 0.7812 Epoch 213/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0275 - acc: 0.9896 - val_loss: 1.0568 - val_acc: 0.7812 Epoch 214/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0134 - acc: 1.0000 - val_loss: 1.0599 - val_acc: 0.7812 Epoch 215/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0129 - acc: 1.0000 - val_loss: 1.0607 - val_acc: 0.7812 Epoch 216/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0067 - acc: 1.0000 - val_loss: 1.0594 - val_acc: 0.7812 Epoch 217/250 3/3 [==============================] - 1s 186ms/step - loss: 0.0161 - acc: 1.0000 - val_loss: 1.0592 - val_acc: 0.7812 Epoch 218/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0117 - acc: 1.0000 - val_loss: 1.0604 - val_acc: 0.7812 Epoch 219/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0261 - acc: 0.9896 - val_loss: 1.0597 - val_acc: 0.7812 Epoch 220/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0211 - acc: 1.0000 - val_loss: 1.0579 - val_acc: 0.7812 Epoch 221/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0105 - acc: 1.0000 - val_loss: 1.0574 - val_acc: 0.7812 Epoch 222/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0146 - acc: 0.9896 - val_loss: 1.0592 - val_acc: 0.7812 Epoch 223/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0228 - acc: 0.9896 - val_loss: 1.0581 - val_acc: 0.7812 Epoch 224/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0102 - acc: 1.0000 - val_loss: 1.0597 - val_acc: 0.7812 Epoch 225/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0058 - acc: 1.0000 - val_loss: 1.0582 - val_acc: 0.7812 Epoch 226/250 3/3 
[==============================] - 1s 188ms/step - loss: 0.0139 - acc: 1.0000 - val_loss: 1.0582 - val_acc: 0.7812 Epoch 227/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0103 - acc: 1.0000 - val_loss: 1.0571 - val_acc: 0.7812 Epoch 228/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0135 - acc: 1.0000 - val_loss: 1.0549 - val_acc: 0.7812 Epoch 229/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0157 - acc: 1.0000 - val_loss: 1.0539 - val_acc: 0.7812 Epoch 230/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0333 - acc: 0.9893 - val_loss: 1.0536 - val_acc: 0.7812 Epoch 231/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0114 - acc: 1.0000 - val_loss: 1.0487 - val_acc: 0.7812 Epoch 232/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0535 - acc: 0.9789 - val_loss: 1.0537 - val_acc: 0.7812 Epoch 233/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0209 - acc: 0.9896 - val_loss: 1.0557 - val_acc: 0.7812 Epoch 234/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0132 - acc: 1.0000 - val_loss: 1.0556 - val_acc: 0.7812 Epoch 235/250 3/3 [==============================] - 1s 191ms/step - loss: 0.0243 - acc: 0.9896 - val_loss: 1.0548 - val_acc: 0.7812 Epoch 236/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0237 - acc: 0.9896 - val_loss: 1.0570 - val_acc: 0.7812 Epoch 237/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0152 - acc: 1.0000 - val_loss: 1.0589 - val_acc: 0.7812 Epoch 238/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0241 - acc: 0.9896 - val_loss: 1.0593 - val_acc: 0.7812 Epoch 239/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0034 - acc: 1.0000 - val_loss: 1.0629 - val_acc: 0.7812 Epoch 240/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0178 - acc: 1.0000 - val_loss: 1.0634 - val_acc: 0.7812 Epoch 241/250 3/3 
[==============================] - 1s 187ms/step - loss: 0.0173 - acc: 1.0000 - val_loss: 1.0617 - val_acc: 0.7812 Epoch 242/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0214 - acc: 0.9896 - val_loss: 1.0624 - val_acc: 0.7812 Epoch 243/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0192 - acc: 1.0000 - val_loss: 1.0668 - val_acc: 0.7812 Epoch 244/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0135 - acc: 1.0000 - val_loss: 1.0633 - val_acc: 0.7812 Epoch 245/250 3/3 [==============================] - 1s 189ms/step - loss: 0.0278 - acc: 0.9896 - val_loss: 1.0611 - val_acc: 0.7812 Epoch 246/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0089 - acc: 1.0000 - val_loss: 1.0657 - val_acc: 0.7812 Epoch 247/250 3/3 [==============================] - 1s 190ms/step - loss: 0.0186 - acc: 1.0000 - val_loss: 1.0674 - val_acc: 0.7812 Epoch 248/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0150 - acc: 0.9893 - val_loss: 1.0664 - val_acc: 0.7812 Epoch 249/250 3/3 [==============================] - 1s 187ms/step - loss: 0.0656 - acc: 0.9575 - val_loss: 1.0677 - val_acc: 0.7812 Epoch 250/250 3/3 [==============================] - 1s 188ms/step - loss: 0.0123 - acc: 1.0000 - val_loss: 1.0653 - val_acc: 0.7812
# Persist the trained network to disk, then immediately reload it so the
# in-memory `model` is exactly what was checkpointed (sanity-checks that the
# HDF5 round-trip works before any downstream visualisation/inference).
# NOTE(review): the "7812" suffix presumably encodes the final val_acc
# (0.7812) from the training log above — confirm the naming convention.
MODEL_PATH = "cam_grid_prostate_resnet50_7812.h5"  # single source of truth for the checkpoint file

model.save(MODEL_PATH)  # saves architecture + weights + optimizer state

from keras.models import load_model

model = load_model(MODEL_PATH)
model.summary()
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_1 (InputLayer) (None, 224, 224, 1) 0
__________________________________________________________________________________________________
conv1_pad (ZeroPadding2D) (None, 230, 230, 1) 0 input_1[0][0]
__________________________________________________________________________________________________
conv1 (Conv2D) (None, 112, 112, 64) 3200 conv1_pad[0][0]
__________________________________________________________________________________________________
bn_conv1 (BatchNormalization) (None, 112, 112, 64) 256 conv1[0][0]
__________________________________________________________________________________________________
activation_1 (Activation) (None, 112, 112, 64) 0 bn_conv1[0][0]
__________________________________________________________________________________________________
pool1_pad (ZeroPadding2D) (None, 114, 114, 64) 0 activation_1[0][0]
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D) (None, 56, 56, 64) 0 pool1_pad[0][0]
__________________________________________________________________________________________________
res2a_branch2a (Conv2D) (None, 56, 56, 64) 4160 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
bn2a_branch2a (BatchNormalizati (None, 56, 56, 64) 256 res2a_branch2a[0][0]
__________________________________________________________________________________________________
activation_2 (Activation) (None, 56, 56, 64) 0 bn2a_branch2a[0][0]
__________________________________________________________________________________________________
res2a_branch2b (Conv2D) (None, 56, 56, 64) 36928 activation_2[0][0]
__________________________________________________________________________________________________
bn2a_branch2b (BatchNormalizati (None, 56, 56, 64) 256 res2a_branch2b[0][0]
__________________________________________________________________________________________________
activation_3 (Activation) (None, 56, 56, 64) 0 bn2a_branch2b[0][0]
__________________________________________________________________________________________________
res2a_branch2c (Conv2D) (None, 56, 56, 256) 16640 activation_3[0][0]
__________________________________________________________________________________________________
res2a_branch1 (Conv2D) (None, 56, 56, 256) 16640 max_pooling2d_1[0][0]
__________________________________________________________________________________________________
bn2a_branch2c (BatchNormalizati (None, 56, 56, 256) 1024 res2a_branch2c[0][0]
__________________________________________________________________________________________________
bn2a_branch1 (BatchNormalizatio (None, 56, 56, 256) 1024 res2a_branch1[0][0]
__________________________________________________________________________________________________
add_1 (Add) (None, 56, 56, 256) 0 bn2a_branch2c[0][0]
bn2a_branch1[0][0]
__________________________________________________________________________________________________
activation_4 (Activation) (None, 56, 56, 256) 0 add_1[0][0]
__________________________________________________________________________________________________
res2b_branch2a (Conv2D) (None, 56, 56, 64) 16448 activation_4[0][0]
__________________________________________________________________________________________________
bn2b_branch2a (BatchNormalizati (None, 56, 56, 64) 256 res2b_branch2a[0][0]
__________________________________________________________________________________________________
activation_5 (Activation) (None, 56, 56, 64) 0 bn2b_branch2a[0][0]
__________________________________________________________________________________________________
res2b_branch2b (Conv2D) (None, 56, 56, 64) 36928 activation_5[0][0]
__________________________________________________________________________________________________
bn2b_branch2b (BatchNormalizati (None, 56, 56, 64) 256 res2b_branch2b[0][0]
__________________________________________________________________________________________________
activation_6 (Activation) (None, 56, 56, 64) 0 bn2b_branch2b[0][0]
__________________________________________________________________________________________________
res2b_branch2c (Conv2D) (None, 56, 56, 256) 16640 activation_6[0][0]
__________________________________________________________________________________________________
bn2b_branch2c (BatchNormalizati (None, 56, 56, 256) 1024 res2b_branch2c[0][0]
__________________________________________________________________________________________________
add_2 (Add) (None, 56, 56, 256) 0 bn2b_branch2c[0][0]
activation_4[0][0]
__________________________________________________________________________________________________
activation_7 (Activation) (None, 56, 56, 256) 0 add_2[0][0]
__________________________________________________________________________________________________
res2c_branch2a (Conv2D) (None, 56, 56, 64) 16448 activation_7[0][0]
__________________________________________________________________________________________________
bn2c_branch2a (BatchNormalizati (None, 56, 56, 64) 256 res2c_branch2a[0][0]
__________________________________________________________________________________________________
activation_8 (Activation) (None, 56, 56, 64) 0 bn2c_branch2a[0][0]
__________________________________________________________________________________________________
res2c_branch2b (Conv2D) (None, 56, 56, 64) 36928 activation_8[0][0]
__________________________________________________________________________________________________
bn2c_branch2b (BatchNormalizati (None, 56, 56, 64) 256 res2c_branch2b[0][0]
__________________________________________________________________________________________________
activation_9 (Activation) (None, 56, 56, 64) 0 bn2c_branch2b[0][0]
__________________________________________________________________________________________________
res2c_branch2c (Conv2D) (None, 56, 56, 256) 16640 activation_9[0][0]
__________________________________________________________________________________________________
bn2c_branch2c (BatchNormalizati (None, 56, 56, 256) 1024 res2c_branch2c[0][0]
__________________________________________________________________________________________________
add_3 (Add) (None, 56, 56, 256) 0 bn2c_branch2c[0][0]
activation_7[0][0]
__________________________________________________________________________________________________
activation_10 (Activation) (None, 56, 56, 256) 0 add_3[0][0]
__________________________________________________________________________________________________
res3a_branch2a (Conv2D) (None, 28, 28, 128) 32896 activation_10[0][0]
__________________________________________________________________________________________________
bn3a_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3a_branch2a[0][0]
__________________________________________________________________________________________________
activation_11 (Activation) (None, 28, 28, 128) 0 bn3a_branch2a[0][0]
__________________________________________________________________________________________________
res3a_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_11[0][0]
__________________________________________________________________________________________________
bn3a_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3a_branch2b[0][0]
__________________________________________________________________________________________________
activation_12 (Activation) (None, 28, 28, 128) 0 bn3a_branch2b[0][0]
__________________________________________________________________________________________________
res3a_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_12[0][0]
__________________________________________________________________________________________________
res3a_branch1 (Conv2D) (None, 28, 28, 512) 131584 activation_10[0][0]
__________________________________________________________________________________________________
bn3a_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3a_branch2c[0][0]
__________________________________________________________________________________________________
bn3a_branch1 (BatchNormalizatio (None, 28, 28, 512) 2048 res3a_branch1[0][0]
__________________________________________________________________________________________________
add_4 (Add) (None, 28, 28, 512) 0 bn3a_branch2c[0][0]
bn3a_branch1[0][0]
__________________________________________________________________________________________________
activation_13 (Activation) (None, 28, 28, 512) 0 add_4[0][0]
__________________________________________________________________________________________________
res3b_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_13[0][0]
__________________________________________________________________________________________________
bn3b_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3b_branch2a[0][0]
__________________________________________________________________________________________________
activation_14 (Activation) (None, 28, 28, 128) 0 bn3b_branch2a[0][0]
__________________________________________________________________________________________________
res3b_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_14[0][0]
__________________________________________________________________________________________________
bn3b_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3b_branch2b[0][0]
__________________________________________________________________________________________________
activation_15 (Activation) (None, 28, 28, 128) 0 bn3b_branch2b[0][0]
__________________________________________________________________________________________________
res3b_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_15[0][0]
__________________________________________________________________________________________________
bn3b_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3b_branch2c[0][0]
__________________________________________________________________________________________________
add_5 (Add) (None, 28, 28, 512) 0 bn3b_branch2c[0][0]
activation_13[0][0]
__________________________________________________________________________________________________
activation_16 (Activation) (None, 28, 28, 512) 0 add_5[0][0]
__________________________________________________________________________________________________
res3c_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_16[0][0]
__________________________________________________________________________________________________
bn3c_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3c_branch2a[0][0]
__________________________________________________________________________________________________
activation_17 (Activation) (None, 28, 28, 128) 0 bn3c_branch2a[0][0]
__________________________________________________________________________________________________
res3c_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_17[0][0]
__________________________________________________________________________________________________
bn3c_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3c_branch2b[0][0]
__________________________________________________________________________________________________
activation_18 (Activation) (None, 28, 28, 128) 0 bn3c_branch2b[0][0]
__________________________________________________________________________________________________
res3c_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_18[0][0]
__________________________________________________________________________________________________
bn3c_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3c_branch2c[0][0]
__________________________________________________________________________________________________
add_6 (Add) (None, 28, 28, 512) 0 bn3c_branch2c[0][0]
activation_16[0][0]
__________________________________________________________________________________________________
activation_19 (Activation) (None, 28, 28, 512) 0 add_6[0][0]
__________________________________________________________________________________________________
res3d_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_19[0][0]
__________________________________________________________________________________________________
bn3d_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3d_branch2a[0][0]
__________________________________________________________________________________________________
activation_20 (Activation) (None, 28, 28, 128) 0 bn3d_branch2a[0][0]
__________________________________________________________________________________________________
res3d_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_20[0][0]
__________________________________________________________________________________________________
bn3d_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3d_branch2b[0][0]
__________________________________________________________________________________________________
activation_21 (Activation) (None, 28, 28, 128) 0 bn3d_branch2b[0][0]
__________________________________________________________________________________________________
res3d_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_21[0][0]
__________________________________________________________________________________________________
bn3d_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3d_branch2c[0][0]
__________________________________________________________________________________________________
add_7 (Add) (None, 28, 28, 512) 0 bn3d_branch2c[0][0]
activation_19[0][0]
__________________________________________________________________________________________________
activation_22 (Activation) (None, 28, 28, 512) 0 add_7[0][0]
__________________________________________________________________________________________________
res4a_branch2a (Conv2D) (None, 14, 14, 256) 131328 activation_22[0][0]
__________________________________________________________________________________________________
bn4a_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4a_branch2a[0][0]
__________________________________________________________________________________________________
activation_23 (Activation) (None, 14, 14, 256) 0 bn4a_branch2a[0][0]
__________________________________________________________________________________________________
res4a_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_23[0][0]
__________________________________________________________________________________________________
bn4a_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4a_branch2b[0][0]
__________________________________________________________________________________________________
activation_24 (Activation) (None, 14, 14, 256) 0 bn4a_branch2b[0][0]
__________________________________________________________________________________________________
res4a_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_24[0][0]
__________________________________________________________________________________________________
res4a_branch1 (Conv2D) (None, 14, 14, 1024) 525312 activation_22[0][0]
__________________________________________________________________________________________________
bn4a_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4a_branch2c[0][0]
__________________________________________________________________________________________________
bn4a_branch1 (BatchNormalizatio (None, 14, 14, 1024) 4096 res4a_branch1[0][0]
__________________________________________________________________________________________________
add_8 (Add) (None, 14, 14, 1024) 0 bn4a_branch2c[0][0]
bn4a_branch1[0][0]
__________________________________________________________________________________________________
activation_25 (Activation) (None, 14, 14, 1024) 0 add_8[0][0]
__________________________________________________________________________________________________
res4b_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_25[0][0]
__________________________________________________________________________________________________
bn4b_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4b_branch2a[0][0]
__________________________________________________________________________________________________
activation_26 (Activation) (None, 14, 14, 256) 0 bn4b_branch2a[0][0]
__________________________________________________________________________________________________
res4b_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_26[0][0]
__________________________________________________________________________________________________
bn4b_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4b_branch2b[0][0]
__________________________________________________________________________________________________
activation_27 (Activation) (None, 14, 14, 256) 0 bn4b_branch2b[0][0]
__________________________________________________________________________________________________
res4b_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_27[0][0]
__________________________________________________________________________________________________
bn4b_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4b_branch2c[0][0]
__________________________________________________________________________________________________
add_9 (Add) (None, 14, 14, 1024) 0 bn4b_branch2c[0][0]
activation_25[0][0]
__________________________________________________________________________________________________
activation_28 (Activation) (None, 14, 14, 1024) 0 add_9[0][0]
__________________________________________________________________________________________________
res4c_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_28[0][0]
__________________________________________________________________________________________________
bn4c_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4c_branch2a[0][0]
__________________________________________________________________________________________________
activation_29 (Activation) (None, 14, 14, 256) 0 bn4c_branch2a[0][0]
__________________________________________________________________________________________________
res4c_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_29[0][0]
__________________________________________________________________________________________________
bn4c_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4c_branch2b[0][0]
__________________________________________________________________________________________________
activation_30 (Activation) (None, 14, 14, 256) 0 bn4c_branch2b[0][0]
__________________________________________________________________________________________________
res4c_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_30[0][0]
__________________________________________________________________________________________________
bn4c_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4c_branch2c[0][0]
__________________________________________________________________________________________________
add_10 (Add) (None, 14, 14, 1024) 0 bn4c_branch2c[0][0]
activation_28[0][0]
__________________________________________________________________________________________________
activation_31 (Activation) (None, 14, 14, 1024) 0 add_10[0][0]
__________________________________________________________________________________________________
res4d_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_31[0][0]
__________________________________________________________________________________________________
bn4d_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4d_branch2a[0][0]
__________________________________________________________________________________________________
activation_32 (Activation) (None, 14, 14, 256) 0 bn4d_branch2a[0][0]
__________________________________________________________________________________________________
res4d_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_32[0][0]
__________________________________________________________________________________________________
bn4d_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4d_branch2b[0][0]
__________________________________________________________________________________________________
activation_33 (Activation) (None, 14, 14, 256) 0 bn4d_branch2b[0][0]
__________________________________________________________________________________________________
res4d_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_33[0][0]
__________________________________________________________________________________________________
bn4d_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4d_branch2c[0][0]
__________________________________________________________________________________________________
add_11 (Add) (None, 14, 14, 1024) 0 bn4d_branch2c[0][0]
activation_31[0][0]
__________________________________________________________________________________________________
activation_34 (Activation) (None, 14, 14, 1024) 0 add_11[0][0]
__________________________________________________________________________________________________
res4e_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_34[0][0]
__________________________________________________________________________________________________
bn4e_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4e_branch2a[0][0]
__________________________________________________________________________________________________
activation_35 (Activation) (None, 14, 14, 256) 0 bn4e_branch2a[0][0]
__________________________________________________________________________________________________
res4e_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_35[0][0]
__________________________________________________________________________________________________
bn4e_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4e_branch2b[0][0]
__________________________________________________________________________________________________
activation_36 (Activation) (None, 14, 14, 256) 0 bn4e_branch2b[0][0]
__________________________________________________________________________________________________
res4e_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_36[0][0]
__________________________________________________________________________________________________
bn4e_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4e_branch2c[0][0]
__________________________________________________________________________________________________
add_12 (Add) (None, 14, 14, 1024) 0 bn4e_branch2c[0][0]
activation_34[0][0]
__________________________________________________________________________________________________
activation_37 (Activation) (None, 14, 14, 1024) 0 add_12[0][0]
__________________________________________________________________________________________________
res4f_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_37[0][0]
__________________________________________________________________________________________________
bn4f_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4f_branch2a[0][0]
__________________________________________________________________________________________________
activation_38 (Activation) (None, 14, 14, 256) 0 bn4f_branch2a[0][0]
__________________________________________________________________________________________________
res4f_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_38[0][0]
__________________________________________________________________________________________________
bn4f_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4f_branch2b[0][0]
__________________________________________________________________________________________________
activation_39 (Activation) (None, 14, 14, 256) 0 bn4f_branch2b[0][0]
__________________________________________________________________________________________________
res4f_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_39[0][0]
__________________________________________________________________________________________________
bn4f_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4f_branch2c[0][0]
__________________________________________________________________________________________________
add_13 (Add) (None, 14, 14, 1024) 0 bn4f_branch2c[0][0]
activation_37[0][0]
__________________________________________________________________________________________________
activation_40 (Activation) (None, 14, 14, 1024) 0 add_13[0][0]
__________________________________________________________________________________________________
res5a_branch2a (Conv2D) (None, 7, 7, 512) 524800 activation_40[0][0]
__________________________________________________________________________________________________
bn5a_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5a_branch2a[0][0]
__________________________________________________________________________________________________
activation_41 (Activation) (None, 7, 7, 512) 0 bn5a_branch2a[0][0]
__________________________________________________________________________________________________
res5a_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_41[0][0]
__________________________________________________________________________________________________
bn5a_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5a_branch2b[0][0]
__________________________________________________________________________________________________
activation_42 (Activation) (None, 7, 7, 512) 0 bn5a_branch2b[0][0]
__________________________________________________________________________________________________
res5a_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_42[0][0]
__________________________________________________________________________________________________
res5a_branch1 (Conv2D) (None, 7, 7, 2048) 2099200 activation_40[0][0]
__________________________________________________________________________________________________
bn5a_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5a_branch2c[0][0]
__________________________________________________________________________________________________
bn5a_branch1 (BatchNormalizatio (None, 7, 7, 2048) 8192 res5a_branch1[0][0]
__________________________________________________________________________________________________
add_14 (Add) (None, 7, 7, 2048) 0 bn5a_branch2c[0][0]
bn5a_branch1[0][0]
__________________________________________________________________________________________________
activation_43 (Activation) (None, 7, 7, 2048) 0 add_14[0][0]
__________________________________________________________________________________________________
res5b_branch2a (Conv2D) (None, 7, 7, 512) 1049088 activation_43[0][0]
__________________________________________________________________________________________________
bn5b_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5b_branch2a[0][0]
__________________________________________________________________________________________________
activation_44 (Activation) (None, 7, 7, 512) 0 bn5b_branch2a[0][0]
__________________________________________________________________________________________________
res5b_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_44[0][0]
__________________________________________________________________________________________________
bn5b_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5b_branch2b[0][0]
__________________________________________________________________________________________________
activation_45 (Activation) (None, 7, 7, 512) 0 bn5b_branch2b[0][0]
__________________________________________________________________________________________________
res5b_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_45[0][0]
__________________________________________________________________________________________________
bn5b_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5b_branch2c[0][0]
__________________________________________________________________________________________________
add_15 (Add) (None, 7, 7, 2048) 0 bn5b_branch2c[0][0]
activation_43[0][0]
__________________________________________________________________________________________________
activation_46 (Activation) (None, 7, 7, 2048) 0 add_15[0][0]
__________________________________________________________________________________________________
res5c_branch2a (Conv2D) (None, 7, 7, 512) 1049088 activation_46[0][0]
__________________________________________________________________________________________________
bn5c_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5c_branch2a[0][0]
__________________________________________________________________________________________________
activation_47 (Activation) (None, 7, 7, 512) 0 bn5c_branch2a[0][0]
__________________________________________________________________________________________________
res5c_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_47[0][0]
__________________________________________________________________________________________________
bn5c_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5c_branch2b[0][0]
__________________________________________________________________________________________________
activation_48 (Activation) (None, 7, 7, 512) 0 bn5c_branch2b[0][0]
__________________________________________________________________________________________________
res5c_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_48[0][0]
__________________________________________________________________________________________________
bn5c_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5c_branch2c[0][0]
__________________________________________________________________________________________________
add_16 (Add) (None, 7, 7, 2048) 0 bn5c_branch2c[0][0]
activation_46[0][0]
__________________________________________________________________________________________________
activation_49 (Activation) (None, 7, 7, 2048) 0 add_16[0][0]
__________________________________________________________________________________________________
activation_50 (Activation) (None, 7, 7, 2048) 0 activation_49[0][0]
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 7, 7, 2048) 8192 activation_50[0][0]
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 3, 3, 2048) 0 batch_normalization_1[0][0]
__________________________________________________________________________________________________
flatten_1 (Flatten) (None, 18432) 0 average_pooling2d_1[0][0]
__________________________________________________________________________________________________
dense_1 (Dense) (None, 2) 36866 flatten_1[0][0]
==================================================================================================
Total params: 23,626,498
Trainable params: 23,569,282
Non-trainable params: 57,216
__________________________________________________________________________________________________
from vis.visualization import visualize_saliency, overlay
from vis.utils import utils
from keras import activations
from vis.visualization import visualize_cam
import matplotlib.cm as cm

# Grad-CAM works on pre-softmax scores: swap the final softmax for a
# linear activation so the class gradients are not squashed, then rebuild
# the model so the change takes effect.
layer_idx = utils.find_layer_idx(model, 'dense_1')
model.layers[layer_idx].activation = activations.linear
model = utils.apply_modifications(model)

# Layer indices to probe as Grad-CAM's penultimate layer, hand-picked
# from model.summary() (mostly Conv2D, a few BatchNormalization).
conv_list = np.array([2, 7, 10, 13, 14, 19, 22, 25, 29, 32, 35, 39, 42, 45, 46, 51, 54, 57, 61,
                      64, 68, 71, 74, 78, 81, 84, 85, 90, 93, 96, 100, 103, 106, 110, 113, 116,
                      120, 123, 126, 130, 133, 136, 140, 143, 146, 147, 152, 155, 158, 162, 165, 168])

# Load one grayscale slice and shape it (224, 224, 1) to match the
# network's single-channel input.
img = cv2.imread('./True/HU_LI_YANG_100257720_slice_013_0.png', 0)
img = cv2.resize(img, (224, 224))
print(img.shape)
img = np.reshape(img, (224, 224, 1))
print(img.shape)

for i in conv_list:
    print(i, model.layers[i], '\n')
    # One Grad-CAM per output class of the 2-way classifier.
    for t in range(2):
        grads = visualize_cam(model, layer_idx, filter_indices=t,
                              penultimate_layer_idx=i, seed_input=img,
                              backprop_modifier='relu')
        print('this is layer ', i, '\n')
        # Show the input next to its class-activation heatmap.
        plt.imshow(img[..., 0], cmap=plt.cm.gray)
        plt.show()
        plt.imshow(grads, cmap='jet')
        plt.show()
        # scipy.misc.imsave was removed in SciPy >= 1.3; write the heatmap
        # with OpenCV instead. visualize_cam returns a uint8 heatmap, so no
        # rescaling is needed before imwrite.
        # NOTE(review): every iteration overwrites the same file, so only
        # the last layer/class heatmap survives — likely unintended;
        # consider f'./Result/G1_layer{i}_class{t}.jpg'.
        cv2.imwrite('./Result/G1.jpg', grads)
(224, 224) (224, 224, 1) 2 <keras.layers.convolutional.Conv2D object at 0x7fe42886ba58> this is layer 2
this is layer 2
7 <keras.layers.convolutional.Conv2D object at 0x7fe42886bbe0> this is layer 7
this is layer 7
10 <keras.layers.convolutional.Conv2D object at 0x7fe42886bf28> this is layer 10
this is layer 10
13 <keras.layers.convolutional.Conv2D object at 0x7fe428863278> this is layer 13
this is layer 13
14 <keras.layers.convolutional.Conv2D object at 0x7fe428863400> this is layer 14
this is layer 14
19 <keras.layers.convolutional.Conv2D object at 0x7fe428863828> this is layer 19
this is layer 19
22 <keras.layers.convolutional.Conv2D object at 0x7fe428863b00> this is layer 22
this is layer 22
25 <keras.layers.convolutional.Conv2D object at 0x7fe428863dd8> this is layer 25
this is layer 25
29 <keras.layers.convolutional.Conv2D object at 0x7fe42884a128> this is layer 29
this is layer 29
32 <keras.layers.convolutional.Conv2D object at 0x7fe42884a400> this is layer 32
this is layer 32
35 <keras.layers.convolutional.Conv2D object at 0x7fe42884a6d8> this is layer 35
this is layer 35
39 <keras.layers.convolutional.Conv2D object at 0x7fe42884a9e8> this is layer 39
this is layer 39
42 <keras.layers.convolutional.Conv2D object at 0x7fe42884acc0> this is layer 42
this is layer 42
45 <keras.layers.convolutional.Conv2D object at 0x7fe42884af98> this is layer 45
this is layer 45
46 <keras.layers.convolutional.Conv2D object at 0x7fe42884e160> this is layer 46
this is layer 46
51 <keras.layers.convolutional.Conv2D object at 0x7fe42884e588> this is layer 51
this is layer 51
54 <keras.layers.convolutional.Conv2D object at 0x7fe42884e860> this is layer 54
this is layer 54
57 <keras.layers.convolutional.Conv2D object at 0x7fe42884eb38> this is layer 57
this is layer 57
61 <keras.layers.convolutional.Conv2D object at 0x7fe42884ee48> this is layer 61
this is layer 61
64 <keras.layers.convolutional.Conv2D object at 0x7fe42733d160> this is layer 64
this is layer 64
68 <keras.layers.normalization.BatchNormalization object at 0x7fe42733d5c0> this is layer 68
this is layer 68
71 <keras.layers.convolutional.Conv2D object at 0x7fe42733d748> this is layer 71
this is layer 71
74 <keras.layers.convolutional.Conv2D object at 0x7fe42733da20> this is layer 74
this is layer 74
78 <keras.layers.normalization.BatchNormalization object at 0x7fe42733de80> this is layer 78
this is layer 78
81 <keras.layers.convolutional.Conv2D object at 0x7fe427336048> this is layer 81
this is layer 81
84 <keras.layers.convolutional.Conv2D object at 0x7fe427336320> this is layer 84
this is layer 84
85 <keras.layers.normalization.BatchNormalization object at 0x7fe4273364a8> this is layer 85
this is layer 85
90 <keras.layers.normalization.BatchNormalization object at 0x7fe427336a20> this is layer 90
this is layer 90
93 <keras.layers.convolutional.Conv2D object at 0x7fe427336ba8> this is layer 93
this is layer 93
96 <keras.layers.convolutional.Conv2D object at 0x7fe427336e80> this is layer 96
this is layer 96
100 <keras.layers.normalization.BatchNormalization object at 0x7fe42733a320> this is layer 100
this is layer 100
103 <keras.layers.convolutional.Conv2D object at 0x7fe42733a4a8> this is layer 103
this is layer 103
106 <keras.layers.convolutional.Conv2D object at 0x7fe42733a780> this is layer 106
this is layer 106
110 <keras.layers.normalization.BatchNormalization object at 0x7fe42733abe0> this is layer 110
this is layer 110
113 <keras.layers.convolutional.Conv2D object at 0x7fe42733ad68> this is layer 113
this is layer 113
116 <keras.layers.convolutional.Conv2D object at 0x7fe42731a080> this is layer 116
this is layer 116
120 <keras.layers.normalization.BatchNormalization object at 0x7fe42731a4e0> this is layer 120
this is layer 120
123 <keras.layers.convolutional.Conv2D object at 0x7fe42731a668> this is layer 123
this is layer 123
126 <keras.layers.convolutional.Conv2D object at 0x7fe42731a940> this is layer 126
this is layer 126
130 <keras.layers.normalization.BatchNormalization object at 0x7fe42731ada0> this is layer 130
this is layer 130
133 <keras.layers.convolutional.Conv2D object at 0x7fe42731af28> this is layer 133
this is layer 133
136 <keras.layers.convolutional.Conv2D object at 0x7fe42731f208> this is layer 136
this is layer 136
140 <keras.layers.normalization.BatchNormalization object at 0x7fe42731f630> this is layer 140
this is layer 140
143 <keras.layers.convolutional.Conv2D object at 0x7fe42731f828> this is layer 143
this is layer 143
146 <keras.layers.convolutional.Conv2D object at 0x7fe42731fb00> this is layer 146
this is layer 146
147 <keras.layers.normalization.BatchNormalization object at 0x7fe42731fb38> this is layer 147
this is layer 147
152 <keras.layers.normalization.BatchNormalization object at 0x7fe427323240> this is layer 152
this is layer 152
155 <keras.layers.convolutional.Conv2D object at 0x7fe4273233c8> this is layer 155
this is layer 155
158 <keras.layers.convolutional.Conv2D object at 0x7fe4273236a0> this is layer 158
this is layer 158
162 <keras.layers.normalization.BatchNormalization object at 0x7fe427323b00> this is layer 162
this is layer 162
165 <keras.layers.convolutional.Conv2D object at 0x7fe427323c88> this is layer 165
this is layer 165
168 <keras.layers.convolutional.Conv2D object at 0x7fe427323f60> this is layer 168
this is layer 168